import os
import tensorflow as tf
from tensorflow import keras
from tensorflow.keras.models import Model
from tensorflow.keras.layers import Input, Conv3D, MaxPooling3D, UpSampling3D, concatenate, Conv3DTranspose, BatchNormalization, Dropout, Lambda
from tensorflow.keras.optimizers import Adam
from tensorflow.keras.layers import Activation, MaxPool2D, Concatenate
import scipy
from skimage import transform
from skimage import io
import numpy as np
from matplotlib import pyplot as plt
from tensorflow.keras import backend as K
from tensorflow.keras.utils import to_categorical
from sklearn.model_selection import train_test_split
import tifffile
!pip install patchify
Defaulting to user installation because normal site-packages is not writeable
Requirement already satisfied: patchify in ./.local/lib/python3.8/site-packages (0.2.3)
Requirement already satisfied: numpy<2,>=1 in /data/software/python-libs/3.0.0/lib/python3.8/site-packages (from patchify) (1.22.3)
# Display all 128 axial slices of one volume in a 16 x 8 grid
def plot_all(image):
    index = 0
    count = 1
    fig = plt.figure(figsize=(15, 30))
    for x in range(1, 17):
        for y in range(1, 9):
            plt.subplot(16, 8, count).axis("off")
            plt.title("Count: " + str(count - 1))
            plt.imshow(image[:, :, index])
            count += 1
            index += 1
''' Read Images/Masks '''
images = tifffile.imread('ivc_filter_images_84.tif')
masks = tifffile.imread("ivc_filter_masks_84.tif")
print(images.shape)
print(masks.shape)
(84, 256, 256, 128)
(84, 256, 256, 128)
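# Illustrative usage (added for this write-up; assumes the arrays loaded
# above): render all 128 axial slices of the first volume with the helper.
plot_all(images[0])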
''' Split into training/testing '''
x_train_orig, x_test_orig, y_train_orig, y_test_orig = train_test_split(images, masks, test_size=0.20, random_state=7)
print(x_train_orig.shape)
print(y_train_orig.shape)
print(x_test_orig.shape)
print(y_test_orig.shape)
(67, 256, 256, 128)
(67, 256, 256, 128)
(17, 256, 256, 128)
(17, 256, 256, 128)
''' Creating Patches '''
from skimage.util.shape import view_as_blocks, view_as_windows
from patchify import patchify, unpatchify
def to_patches(images, masks):
    image_patches = []
    mask_patches = []
    for img in images:
        img = img / 255  # scale intensities to [0, 1]
        # view_as_blocks requires each axis to divide evenly:
        # (256, 256, 128) / (64, 64, 32) -> a 4 x 4 x 4 grid of blocks
        image_patches.append(view_as_blocks(img, (64, 64, 32)))
    for msk in masks:
        # Normalize mask labels to 0 or 1
        if msk.max() != 0:
            msk = msk / msk.max()
        mask_patches.append(view_as_blocks(msk, (64, 64, 32)))
    image_patches = np.reshape(image_patches, (-1, 64, 64, 32))
    mask_patches = np.reshape(mask_patches, (-1, 64, 64, 32))
    return image_patches, mask_patches
def patches_to_img(patches):
    # Reassemble 64 patches (a 4 x 4 x 4 grid) back into a (256, 256, 128) volume
    temp = np.reshape(patches, (4, 4, 4, 64, 64, 32))
    return unpatchify(temp, (256, 256, 128))
''' Split into Patches '''
x1_patches, y1_patches = to_patches(x_train_orig, y_train_orig)
x2_patches, y2_patches = to_patches(x_test_orig, y_test_orig)
print(np.asarray(x1_patches).shape)
print(np.asarray(y1_patches).shape)
(4288, 64, 64, 32)
(4288, 64, 64, 32)
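# Optional round-trip check (illustrative): the first 64 patches form the
# 4 x 4 x 4 block grid of the first training volume, so unpatchify should
# reconstruct it exactly (after the same /255 scaling).
recon = patches_to_img(x1_patches[:64])
assert np.allclose(recon, x_train_orig[0] / 255)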
''' Separate the IVCF Patches from non-IVCF Patches '''
x1_zeros = [] # holds the background patches in x_train
x1_ones = [] # holds the ivcf patches in x_train
y1_zeros = [] # holds the background patches in y_train
y1_ones = [] # holds the ivcf patches in y_train
x2_zeros = [] # Same as above but for testing set
x2_ones = []
y2_zeros = []
y2_ones = []
for index in range(len(x1_patches)):
    if y1_patches[index].max() == 1:
        x1_ones.append(x1_patches[index])
        y1_ones.append(y1_patches[index])
    else:
        x1_zeros.append(x1_patches[index])
        y1_zeros.append(y1_patches[index])
for index in range(len(x2_patches)):
    if y2_patches[index].max() == 1:
        x2_ones.append(x2_patches[index])
        y2_ones.append(y2_patches[index])
    else:
        x2_zeros.append(x2_patches[index])
        y2_zeros.append(y2_patches[index])
print(np.asarray(x1_zeros).shape)
print(np.asarray(x1_ones).shape)
print(np.asarray(y1_zeros).shape)
print(np.asarray(y1_ones).shape)
(4113, 64, 64, 32)
(175, 64, 64, 32)
(4113, 64, 64, 32)
(175, 64, 64, 32)
print(np.asarray(x2_zeros).shape)
print(np.asarray(x2_ones).shape)
print(np.asarray(y2_zeros).shape)
print(np.asarray(y2_ones).shape)
(1035, 64, 64, 32)
(53, 64, 64, 32)
(1035, 64, 64, 32)
(53, 64, 64, 32)
''' Create Training/Testing Set with 1:1 Ratio '''
max_len_train = np.asarray(x1_ones).shape[0]
max_len_test = np.asarray(x2_ones).shape[0]
x_train = list(x1_ones)  # copy so the appends below do not also grow x1_ones
y_train = list(y1_ones)
# Sampling stride: keep every thresh-th background patch so the retained
# background examples are spread evenly across the set
thresh1 = int(np.asarray(x1_zeros).shape[0] / np.asarray(x1_ones).shape[0])
thresh2 = int(np.asarray(x2_zeros).shape[0] / np.asarray(x2_ones).shape[0])
print(thresh1, thresh2)
for i, _ in enumerate(x1_zeros):
    if i % thresh1 == 0 and len(x_train) < max_len_train * 2:
        x_train.append(x1_zeros[i])
        y_train.append(y1_zeros[i])
x_test = list(x2_ones)  # copy for the same reason
y_test = list(y2_ones)
for i, _ in enumerate(x2_zeros):
    if i % thresh2 == 0 and len(x_test) < max_len_test * 2:
        x_test.append(x2_zeros[i])
        y_test.append(y2_zeros[i])
23 19
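# Quick balance check (illustrative): each split should now contain as many
# background patches as IVCF patches (350 = 2 x 175 and 106 = 2 x 53).
print(len(x_train), sum(int(p.max() == 1) for p in y_train))
print(len(x_test), sum(int(p.max() == 1) for p in y_test))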
''' Expand_dims and One-Hot Encoding '''
x_train = np.expand_dims(np.asarray(x_train), axis = -1)
x_test = np.expand_dims(np.asarray(x_test), axis = -1)
y_train = to_categorical(np.asarray(y_train))
y_test = to_categorical(np.asarray(y_test))
print(np.asarray(x_train).shape)
print(np.asarray(y_train).shape)
print(np.asarray(x_test).shape)
print(np.asarray(y_test).shape)
(350, 64, 64, 32, 1)
(350, 64, 64, 32, 2)
(106, 64, 64, 32, 1)
(106, 64, 64, 32, 2)
# Define parameters for the model
channels = 1
LR = 0.0001
optim = keras.optimizers.Adam(LR)
def conv_block(input, num_filters):
    x = Conv3D(num_filters, 3, padding="same")(input)
    x = BatchNormalization()(x)  # Not in the original U-Net
    x = Activation("relu")(x)
    x = Conv3D(num_filters, 3, padding="same")(x)
    x = BatchNormalization()(x)  # Not in the original U-Net
    x = Activation("relu")(x)
    return x
# Encoder block: conv block followed by max pooling
def encoder_block(input, num_filters):
    x = conv_block(input, num_filters)
    p = MaxPooling3D((2, 2, 2))(x)
    return x, p
# Decoder block: skip_features comes from the encoder for concatenation
def decoder_block(input, skip_features, num_filters):
    x = Conv3DTranspose(num_filters, (2, 2, 2), strides=2, padding="same")(input)
    x = Concatenate()([x, skip_features])
    x = conv_block(x, num_filters)
    return x
# Build the U-Net from the blocks
def build_unet(input_shape, n_classes):
    inputs = Input(input_shape)
    s1, p1 = encoder_block(inputs, 32)
    s2, p2 = encoder_block(p1, 64)
    # For a deeper four-level U-Net, uncomment s3/s4 and d1/d2 below and
    # feed p4 into the bridge and d2 into d3:
    # s3, p3 = encoder_block(p2, 128)
    # s4, p4 = encoder_block(p3, 256)
    b1 = conv_block(p2, 128)  # Bridge
    # d1 = decoder_block(b1, s4, 256)
    # d2 = decoder_block(d1, s3, 128)
    d3 = decoder_block(b1, s2, 64)
    d4 = decoder_block(d3, s1, 32)
    if n_classes == 1:  # Binary
        activation = 'sigmoid'
    else:
        activation = 'softmax'
    outputs = Conv3D(n_classes, 1, padding="same", activation=activation)(d4)  # activation depends on n_classes
    print(activation)
    model = Model(inputs, outputs, name="U-Net")
    return model
METRICS = [
    tf.keras.metrics.TruePositives(name='tp'),
    tf.keras.metrics.FalsePositives(name='fp'),
    tf.keras.metrics.TrueNegatives(name='tn'),
    tf.keras.metrics.FalseNegatives(name='fn'),
    tf.keras.metrics.BinaryAccuracy(name='accuracy'),
    tf.keras.metrics.Precision(name='precision'),
    tf.keras.metrics.Recall(name='recall'),
    tf.keras.metrics.AUC(name='auc'),
    tf.keras.metrics.AUC(name='prc', curve='PR'),  # precision-recall curve
]
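# Note: the targets are one-hot with two channels, so these element-wise
# "binary" metrics count the background channel as well. That is why tp
# equals tn, fp equals fn, and accuracy/precision/recall coincide in the
# training log below.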
model = build_unet((64, 64, 32, 1), n_classes=2)
model.compile(optimizer = optim, loss=tf.keras.losses.CategoricalCrossentropy(), metrics=METRICS)
print(model.summary())
softmax
Model: "U-Net"
__________________________________________________________________________________________________
Layer (type) Output Shape Param # Connected to
==================================================================================================
input_1 (InputLayer) [(None, 64, 64, 32, 0
__________________________________________________________________________________________________
conv3d (Conv3D) (None, 64, 64, 32, 3 896 input_1[0][0]
__________________________________________________________________________________________________
batch_normalization (BatchNorma (None, 64, 64, 32, 3 128 conv3d[0][0]
__________________________________________________________________________________________________
activation (Activation) (None, 64, 64, 32, 3 0 batch_normalization[0][0]
__________________________________________________________________________________________________
conv3d_1 (Conv3D) (None, 64, 64, 32, 3 27680 activation[0][0]
__________________________________________________________________________________________________
batch_normalization_1 (BatchNor (None, 64, 64, 32, 3 128 conv3d_1[0][0]
__________________________________________________________________________________________________
activation_1 (Activation) (None, 64, 64, 32, 3 0 batch_normalization_1[0][0]
__________________________________________________________________________________________________
max_pooling3d (MaxPooling3D) (None, 32, 32, 16, 3 0 activation_1[0][0]
__________________________________________________________________________________________________
conv3d_2 (Conv3D) (None, 32, 32, 16, 6 55360 max_pooling3d[0][0]
__________________________________________________________________________________________________
batch_normalization_2 (BatchNor (None, 32, 32, 16, 6 256 conv3d_2[0][0]
__________________________________________________________________________________________________
activation_2 (Activation) (None, 32, 32, 16, 6 0 batch_normalization_2[0][0]
__________________________________________________________________________________________________
conv3d_3 (Conv3D) (None, 32, 32, 16, 6 110656 activation_2[0][0]
__________________________________________________________________________________________________
batch_normalization_3 (BatchNor (None, 32, 32, 16, 6 256 conv3d_3[0][0]
__________________________________________________________________________________________________
activation_3 (Activation) (None, 32, 32, 16, 6 0 batch_normalization_3[0][0]
__________________________________________________________________________________________________
max_pooling3d_1 (MaxPooling3D) (None, 16, 16, 8, 64 0 activation_3[0][0]
__________________________________________________________________________________________________
conv3d_4 (Conv3D) (None, 16, 16, 8, 12 221312 max_pooling3d_1[0][0]
__________________________________________________________________________________________________
batch_normalization_4 (BatchNor (None, 16, 16, 8, 12 512 conv3d_4[0][0]
__________________________________________________________________________________________________
activation_4 (Activation) (None, 16, 16, 8, 12 0 batch_normalization_4[0][0]
__________________________________________________________________________________________________
conv3d_5 (Conv3D) (None, 16, 16, 8, 12 442496 activation_4[0][0]
__________________________________________________________________________________________________
batch_normalization_5 (BatchNor (None, 16, 16, 8, 12 512 conv3d_5[0][0]
__________________________________________________________________________________________________
activation_5 (Activation) (None, 16, 16, 8, 12 0 batch_normalization_5[0][0]
__________________________________________________________________________________________________
conv3d_transpose (Conv3DTranspo (None, 32, 32, 16, 6 65600 activation_5[0][0]
__________________________________________________________________________________________________
concatenate (Concatenate) (None, 32, 32, 16, 1 0 conv3d_transpose[0][0]
activation_3[0][0]
__________________________________________________________________________________________________
conv3d_6 (Conv3D) (None, 32, 32, 16, 6 221248 concatenate[0][0]
__________________________________________________________________________________________________
batch_normalization_6 (BatchNor (None, 32, 32, 16, 6 256 conv3d_6[0][0]
__________________________________________________________________________________________________
activation_6 (Activation) (None, 32, 32, 16, 6 0 batch_normalization_6[0][0]
__________________________________________________________________________________________________
conv3d_7 (Conv3D) (None, 32, 32, 16, 6 110656 activation_6[0][0]
__________________________________________________________________________________________________
batch_normalization_7 (BatchNor (None, 32, 32, 16, 6 256 conv3d_7[0][0]
__________________________________________________________________________________________________
activation_7 (Activation) (None, 32, 32, 16, 6 0 batch_normalization_7[0][0]
__________________________________________________________________________________________________
conv3d_transpose_1 (Conv3DTrans (None, 64, 64, 32, 3 16416 activation_7[0][0]
__________________________________________________________________________________________________
concatenate_1 (Concatenate) (None, 64, 64, 32, 6 0 conv3d_transpose_1[0][0]
activation_1[0][0]
__________________________________________________________________________________________________
conv3d_8 (Conv3D) (None, 64, 64, 32, 3 55328 concatenate_1[0][0]
__________________________________________________________________________________________________
batch_normalization_8 (BatchNor (None, 64, 64, 32, 3 128 conv3d_8[0][0]
__________________________________________________________________________________________________
activation_8 (Activation) (None, 64, 64, 32, 3 0 batch_normalization_8[0][0]
__________________________________________________________________________________________________
conv3d_9 (Conv3D) (None, 64, 64, 32, 3 27680 activation_8[0][0]
__________________________________________________________________________________________________
batch_normalization_9 (BatchNor (None, 64, 64, 32, 3 128 conv3d_9[0][0]
__________________________________________________________________________________________________
activation_9 (Activation) (None, 64, 64, 32, 3 0 batch_normalization_9[0][0]
__________________________________________________________________________________________________
conv3d_10 (Conv3D) (None, 64, 64, 32, 2 66 activation_9[0][0]
==================================================================================================
Total params: 1,357,954
Trainable params: 1,356,674
Non-trainable params: 1,280
__________________________________________________________________________________________________
None
''' Checks '''
print("Input shape", model.input_shape)
print("Output shape", model.output_shape)
print("-------------------")
Input shape (None, 64, 64, 32, 1)
Output shape (None, 64, 64, 32, 2)
-------------------
history = model.fit(x_train, y_train,
                    validation_data=(x_test, y_test),
                    batch_size=1,
                    epochs=100,
                    shuffle=True,
                    verbose=1)
Epoch 1/100
350/350 [==============================] - 24s 57ms/step - loss: 0.1496 - tp: 22760432.8917 - fp: 307872.3647 - tn: 22760432.8917 - fn: 307872.3647 - accuracy: 0.9826 - precision: 0.9826 - recall: 0.9826 - auc: 0.9920 - prc: 0.9885 - val_loss: 0.0551 - val_tp: 13754917.0000 - val_fp: 138715.0000 - val_tn: 13754917.0000 - val_fn: 138715.0000 - val_accuracy: 0.9900 - val_precision: 0.9900 - val_recall: 0.9900 - val_auc: 0.9936 - val_prc: 0.9925
Epoch 2/100
350/350 [==============================] - 19s 54ms/step - loss: 0.0481 - tp: 22901040.7635 - fp: 167255.4729 - tn: 22901040.7635 - fn: 167255.4729 - accuracy: 0.9927 - precision: 0.9927 - recall: 0.9927 - auc: 0.9987 - prc: 0.9984 - val_loss: 0.0970 - val_tp: 13308297.0000 - val_fp: 585335.0000 - val_tn: 13308297.0000 - val_fn: 585335.0000 - val_accuracy: 0.9579 - val_precision: 0.9579 - val_recall: 0.9579 - val_auc: 0.9948 - val_prc: 0.9939
...
Epoch 50/100
350/350 [==============================] - 19s 54ms/step - loss: 0.0021 - tp: 23050115.8519 - fp: 18188.2593 - tn: 23050115.8519 - fn: 18188.2593 - accuracy: 0.9992 - precision: 0.9992 - recall: 0.9992 - auc: 1.0000 - prc: 1.0000 - val_loss: 0.0256 - val_tp: 13834348.0000 - val_fp: 59284.0000 - val_tn: 13834348.0000 - val_fn: 59284.0000 - val_accuracy: 0.9957 - val_precision: 0.9957 - val_recall: 0.9957 - val_auc: 0.9979 - val_prc: 0.9973
...
Epoch 100/100
350/350 [==============================] - 19s 54ms/step - loss: 6.7551e-04 - tp: 23061854.0826 - fp: 6444.1994 - tn: 23061854.0826 - fn: 6444.1994 - accuracy: 0.9997 - precision: 0.9997 - recall: 0.9997 - auc: 1.0000 - prc: 1.0000 - val_loss: 0.0338 - val_tp: 13834296.0000 - val_fp: 59336.0000 - val_tn: 13834296.0000 - val_fn: 59336.0000 - val_accuracy: 0.9957 - val_precision: 0.9957 - val_recall: 0.9957 - val_auc: 0.9974 - val_prc: 0.9965
# Save model for future use
model.save('3D_UNet_patch_normalized.h5')
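# Optional sketch (not in the original notebook): persist the per-epoch metrics
# next to the saved weights so plot_metrics() below can be re-run later without
# retraining. The filename 'history.json' is a hypothetical choice.
import json
with open('history.json', 'w') as f:
    # history.history maps each metric name to a list of per-epoch values
    json.dump({k: [float(v) for v in vals] for k, vals in history.history.items()}, f)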
def plot_metrics(history):
    metrics = ['loss', 'prc', 'precision', 'recall']
    fig = plt.figure(figsize=(8, 8))
    for n, metric in enumerate(metrics):
        name = metric.replace("_", " ").capitalize()
        plt.subplot(2, 2, n + 1)
        plt.plot(history.epoch, history.history[metric], color='blue', label='Train')
        plt.plot(history.epoch, history.history['val_' + metric],
                 color='blue', linestyle="--", label='Val')
        plt.xlabel('Epoch')
        plt.ylabel(name)
        if metric == 'loss':
            plt.ylim([0, plt.ylim()[1]])
        elif metric == 'auc':
            plt.ylim([0.8, 1])
        else:
            plt.ylim([0, 1])
        plt.legend();
plot_metrics(history)
# Load the saved model for testing and predictions.
# compile=False skips restoring the loss/optimizer state, which is not needed for inference.
from tensorflow.keras.models import load_model
my_model = load_model('3D_UNet_patch_normalized.h5', compile=False)
''' Predict on Training Set '''
img = x_train_orig.copy()
ground_truth = y_train_orig.copy()
# convert training images to patches
img, ground_truth = to_patches(img, ground_truth)
# predict
train_predictions_baseline = my_model.predict(img)
# argmax over the channel axis collapses the softmax output to a binary mask per patch
train_prediction = np.argmax(train_predictions_baseline, axis=4)
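# Quick sanity check (a sketch, assuming the network ends in a 2-channel
# softmax over each 64x64x32 patch): verify shapes before stitching.
print(train_predictions_baseline.shape)  # expected: (4288, 64, 64, 32, 2)
print(train_prediction.shape)            # expected: (4288, 64, 64, 32)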
''' Stitch the patches back into Images '''
end = []
gt = []
for x in range(67):  # 67 training volumes, 64 patches (4x4x4) per volume
    temp_img = train_prediction[(64*x):64*(x+1)]
    temp_img = patches_to_img(temp_img)
    end.append(temp_img)
for x in range(67):
    temp_img = ground_truth[(64*x):64*(x+1)]
    temp_img = patches_to_img(temp_img)
    gt.append(temp_img)
train_predictions = np.asarray(end)
print(train_predictions.shape)
(67, 256, 256, 128)
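# The two stitching loops above (and the near-identical ones for the test set
# below) could be folded into one helper. A minimal sketch, assuming the fixed
# 64-patches-per-volume layout that patches_to_img() expects; stitch_volumes
# is a hypothetical name, not from the original notebook.
def stitch_volumes(patches, n_volumes):
    volumes = []
    for x in range(n_volumes):
        volumes.append(patches_to_img(patches[64 * x:64 * (x + 1)]))
    return np.asarray(volumes)
# e.g. train_predictions = stitch_volumes(train_prediction, 67)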
''' Mean IoU on Training Set '''
from tensorflow.keras.metrics import MeanIoU
n_classes = 2
IOU_keras = MeanIoU(num_classes=n_classes)
gt1 = gt
IOU_keras.update_state(gt1, train_predictions)
print("Training Set:", IOU_keras.result().numpy())
Training Set: 0.81324434
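# MeanIoU averages the background and IVCF IoUs, and the background class
# dominates the voxel count, so a per-class breakdown is more informative.
# A sketch relying on the total_cm attribute of tf.keras.metrics.MeanIoU
# (an internal accumulated confusion matrix, an implementation detail):
# IoU_c = cm[c, c] / (row_sum_c + col_sum_c - cm[c, c])
cm = IOU_keras.total_cm.numpy()
per_class_iou = np.diag(cm) / (cm.sum(axis=0) + cm.sum(axis=1) - np.diag(cm))
print("Background IoU:", per_class_iou[0])
print("IVCF IoU:", per_class_iou[1])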
''' Training Set Per Pixel Basis '''
from sklearn.metrics import confusion_matrix
import seaborn as sns
y_train_matrix = confusion_matrix(np.asarray(gt1).flatten(), np.asarray(train_predictions).flatten())
ax = plt.subplot()
sns.heatmap(y_train_matrix, annot=True, fmt='d', ax=ax); # annot=True annotates each cell; fmt='d' prints integer counts
# labels, title and ticks
ax.set_xlabel('Predicted labels');
ax.set_ylabel('True labels');
ax.set_title('Train Set Per Pixel Basis');
ax.xaxis.set_ticklabels(['0', '1']); ax.yaxis.set_ticklabels(['0', '1']);
''' Predict on Testing Set '''
img = x_test_orig.copy()
ground_truth = y_test_orig.copy()
# split images to patches
img, ground_truth = to_patches(img, ground_truth)
# predict
test_predictions_baseline = my_model.predict(img)
# argmax over the channel axis collapses the softmax output to a binary mask per patch
test_prediction = np.argmax(test_predictions_baseline, axis=4)
''' Stitch patches back into Images '''
end = []
gt = []
for x in range(17):  # 17 testing volumes, 64 patches (4x4x4) per volume
    temp_img = test_prediction[(64*x):64*(x+1)]
    temp_img = patches_to_img(temp_img)
    end.append(temp_img)
for x in range(17):
    temp_img = ground_truth[(64*x):64*(x+1)]
    temp_img = patches_to_img(temp_img)
    gt.append(temp_img)
test_predictions = np.asarray(end)
print(test_predictions.shape)
(17, 256, 256, 128)
''' Mean IoU for Testing Set '''
from tensorflow.keras.metrics import MeanIoU
n_classes = 2
IOU_keras = MeanIoU(num_classes=n_classes)
gt2 = gt
IOU_keras.update_state(gt2, test_predictions)
print("Testing Set:", IOU_keras.result().numpy())
Testing Set: 0.7774364
''' Test Set Per Pixel Basis '''
y_test_matrix = confusion_matrix(np.asarray(gt2).flatten(), np.asarray(test_predictions).flatten())
ax = plt.subplot()
sns.heatmap(y_test_matrix, annot=True, fmt='d', ax=ax); # annot=True annotates each cell; fmt='d' prints integer counts
# labels, title and ticks
ax.set_xlabel('Predicted labels');
ax.set_ylabel('True labels');
ax.set_title('Test Set Per Pixel Basis');
ax.xaxis.set_ticklabels(['0', '1']); ax.yaxis.set_ticklabels(['0', '1']);
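# Because background voxels vastly outnumber IVCF voxels, the raw counts above
# are hard to compare across classes. A sketch (reusing y_test_matrix) that
# row-normalizes the matrix so each cell reads as per-class recall:
y_test_rates = y_test_matrix / y_test_matrix.sum(axis=1, keepdims=True)
ax = plt.subplot()
sns.heatmap(y_test_rates, annot=True, fmt='.4f', ax=ax)
ax.set_xlabel('Predicted labels')
ax.set_ylabel('True labels')
ax.set_title('Test Set Per Pixel Basis (row-normalized)')
ax.xaxis.set_ticklabels(['0', '1']); ax.yaxis.set_ticklabels(['0', '1'])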
def plot_three(image, mask, predicted):
    index = 0
    count = 1
    fig = plt.figure(figsize=(12, 500))
    for x in range(1, 129):
        for y in range(1, 4):
            plt.subplot(128, 3, count).axis("off")
            if count % 3 == 1:
                plt.title("Image Slice: " + str(index))
                plt.imshow(image[:, :, index], cmap='gray')
            elif count % 3 == 2:
                plt.title("Mask Slice: " + str(index))
                plt.imshow(mask[:, :, index])
            else:
                plt.title("Predicted Mask Slice: " + str(index))
                plt.imshow(predicted[:, :, index])
                index += 1  # advance to the next slice after each image/mask/prediction row
            count += 1
plot_three(x_test_orig[3], y_test_orig[3], test_predictions[3])
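# Rendering all 128 slices produces a very tall figure. A lighter-weight
# alternative (a hypothetical helper, not in the original notebook) views a
# single axial slice side by side:
def plot_three_slice(image, mask, predicted, slice_idx):
    fig, axes = plt.subplots(1, 3, figsize=(12, 4))
    axes[0].imshow(image[:, :, slice_idx], cmap='gray')
    axes[0].set_title("Image Slice: " + str(slice_idx))
    axes[1].imshow(mask[:, :, slice_idx])
    axes[1].set_title("Mask Slice: " + str(slice_idx))
    axes[2].imshow(predicted[:, :, slice_idx])
    axes[2].set_title("Predicted Mask Slice: " + str(slice_idx))
    for ax in axes:
        ax.axis("off")
# e.g. plot_three_slice(x_test_orig[3], y_test_orig[3], test_predictions[3], 64)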